In [1]:
import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt
from torchsummary import summary
In [2]:
from nmfd_gnn import NMFD_GNN

1: set parameters¶

In [3]:
print (torch.cuda.is_available())
device = torch.device("cuda:0")
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
r = random.random
True
In [4]:
#1.1: settings
M = 20                       #number of time interval in a window
missing_ratio = 0.50
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))
print (file_name)

#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1
batch_size_vt = 16  #batch size for evaluation and test
delta_ratio = 0.1   #the ratio of delta in the standard deviation of flow

hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,\
         "beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy, "delta_ratio": delta_ratio}

gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10    #column dimension of L1, L2
c_k = 5.5     #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.879, 5.207, -2.473, 1.722, 3.619] 

hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,\
               "p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30

#1.3: set paths
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic_london/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
    file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
m_20_missing_50

2: visualization¶

In [5]:
def visualize_train_loss(total_phy_flow_occ_loss):
    plt.figure(figsize=(4,3), dpi=75)
    t_p_f_o_l = np.array(total_phy_flow_occ_loss)
    e_loss, p_loss, f_loss, o_loss = t_p_f_o_l[:,0], t_p_f_o_l[:,1], t_p_f_o_l[:,2], t_p_f_o_l[:,3]
    x = range(len(e_loss))
    plt.plot(x, p_loss, linewidth=1, label = "phy loss")
    plt.plot(x, f_loss, linewidth=1, label = "flow loss")
    plt.plot(x, o_loss, linewidth=1, label = "occ loss")
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_flow_loss(vali_f_mae, test_f_mae):
    plt.figure(figsize=(4,3), dpi=75)
    x = range(len(vali_f_mae))    
    plt.plot(x, vali_f_mae, linewidth=1, label="Validate")
    plt.plot(x, test_f_mae, linewidth=1, label="Test")
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_occ_loss(vali_o_mae, test_o_mae):
    plt.figure(figsize=(4,3), dpi=75)
    x = range(len(vali_o_mae))    
    plt.plot(x, vali_o_mae, linewidth=1, label="Validate")
    plt.plot(x, test_o_mae, linewidth=1, label="Test")
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()

3: compute the error¶

In [6]:
def MAELoss(yhat, y):
    return float(torch.mean(torch.div(torch.abs(yhat-y), 1)))

def RMSELoss(yhat, y):
    return float(torch.sqrt(torch.mean((yhat-y)**2)))

def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):    
    flow_std, occ_std, n = f_o_mean_std[1], f_o_mean_std[3], len(f)
    f_mae_list, f_rmse_list, o_mae_list, o_rmse_list, num_list = list(), list(), list(), list(), list()
    for i in range(0, n, b_s_vt):
        s, e = i, np.min([i+b_s_vt, n])
        num_list.append(e-s)
        bf, bo, bf_mask, bo_mask = f[s: e], o[s: e], f_mask[s: e], o_mask[s: e]  
        bf_hat, bo_hat, bq_hat, bq_theta = model.run(bf_mask, bo_mask)
        bf_hat, bo_hat = bf_hat.cpu(), bo_hat.cpu()
        bf_mae, bf_rmse = MAELoss(bf_hat, bf)*flow_std, RMSELoss(bf_hat, bf)*flow_std
        bo_mae, bo_rmse = MAELoss(bo_hat, bo)*occ_std, RMSELoss(bo_hat, bo)*occ_std
        f_mae_list.append(bf_mae)
        f_rmse_list.append(bf_rmse)
        o_mae_list.append(bo_mae)
        o_rmse_list.append(bo_rmse)
    f_mae, o_mae = np.dot(f_mae_list, num_list)/n, np.dot(o_mae_list, num_list)/n
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list)/n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list)/n)
    return f_mae, f_rmse, o_mae, o_rmse

def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt): #vt: vali_test
    vt_f_mae, vt_f_rmse, vt_o_mae, vt_o_rmse  =\
         vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)
    return vt_f_mae, vt_f_rmse, vt_o_mae, vt_o_rmse

4: train¶

In [7]:
import torch
In [8]:
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ, delta): 
    #f: flow; o: occupancy
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()
    
    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))   
    
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]   
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]

        opt.zero_grad() 
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)
        
        #p_loss = criterion(q_hat, q_theta).cpu()                #physical loss 
        #p_loss = p_loss/flow_std_squ
        
        #hinge loss
        q_gap = q_hat - q_theta       
        delta_gap = torch.ones(q_gap.shape, device=device)*delta
        zero_gap = torch.zeros(q_gap.shape, device=device)            #(n, m)
        hl_loss = torch.max(q_gap-delta_gap, zero_gap) + torch.max(-delta_gap-q_gap, zero_gap) 
        hl_loss = hl_loss/flow_std_squ
        p_loss = criterion(hl_loss, zero_gap).cpu()            #(n, m)
        f_loss = criterion(y_f_hat.cpu(), y_f_batch)              #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch)              #data loss of occupancy
        
        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss
        
        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())
        
        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")
        n_loss = float(len(losses)+0.000001)
        aver_loss = sum(losses)/n_loss
        aver_p_loss = sum(p_losses)/n_loss
        aver_f_loss = sum(f_losses)/n_loss
        aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss

#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    total_phy_flow_occ_loss = list()
    
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    f_std = f_o_mean_std[1]
    
    vali_f, vali_o = vali["flow"], vali["occupancy"] 
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device) 
    test_f, test_o = test["flow"], test["occupancy"] 
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device) 
    
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    
    flow_std_squ = np.power(f_std, 2)
    
    no_decrease = 0
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        
        time_s = time.time()
        print ("i_epoch: ", i)
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx] 
        
        delta = hyper["delta_ratio"] * f_std
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ, delta)
        opt_scheduler.step()
        
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)  
        
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        
        performance = {"train": total_phy_flow_occ_loss,\
                  "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
                  "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile =  open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        
        #early stop
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            min_until_now = 1000000.0  
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model 
        n_mse_flow_occ = n_mse_flow_occ + 1
        
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model    

5: prepare tensors¶

In [9]:
def tensorize(train_vali_test):
    result = dict()
    result["flow"] = torch.tensor(train_vali_test["flow"]) 
    result["flow_mask"] = torch.tensor(train_vali_test["flow_mask"])     
    result["occupancy"] = torch.tensor(train_vali_test["occupancy"]) 
    result["occupancy_mask"] = torch.tensor(train_vali_test["occupancy_mask"]) 
    return result

def normalize_flow_occ(tvt, f_o_mean_std):  #tvt: train, vali, test
    #flow
    f_mean, f_std = f_o_mean_std[0], f_o_mean_std[1]
    f_mask, f = tvt["flow_mask"], tvt["flow"]
    tvt["flow_mask"] = ((np.array(f_mask)-f_mean)/f_std).tolist()
    tvt["flow"] = ((np.array(f)-f_mean)/f_std).tolist()
    
    #occ
    o_mean, o_std = f_o_mean_std[2], f_o_mean_std[3]
    o_mask, o = tvt["occupancy_mask"], tvt["occupancy"]
    tvt["occupancy_mask"] = ((np.array(o_mask)-o_mean)/o_std).tolist()
    tvt["occupancy"] = ((np.array(o)-o_mean)/o_std).tolist()   
    return tvt

def transform_distance(d_matrix):
    sigma, n_row, n_col = np.std(d_matrix), len(d_matrix), len(d_matrix[0])
    sigma_square = sigma*sigma
    for i in range(n_row):
        for j in range(n_col):
            d_i_j = d_matrix[i][j]
            d_matrix[i][j] = np.exp(0.0-10000.0*d_i_j*d_i_j/sigma_square)
    return d_matrix

def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    mean_std = json.load(open(mean_std_path))
    f_mean, f_std, o_mean, o_std =\
        mean_std["f_mean"], mean_std["f_std"], mean_std["o_mean"], mean_std["o_std"]
    f_o_mean_std = [f_mean, f_std, o_mean, o_std]
    
    train = json.load(open(train_path))
    vali = json.load(open(vali_path))
    test = json.load(open(test_path))
    adj = json.load(open(sensor_adj_path))["adj"]
    n_sensor = len(train["flow"][0])    
    
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))

    adj = torch.tensor(transform_distance(adj), device=device).float()   
    
    df_sensor_id = json.load(open(sensor_id_path))
    sensor_length = [0.0 for i in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]
        
    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length

6: main¶

In [10]:
#6.1 load the data
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)
14.948076486587524
In [11]:
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
1536
499
500
[425.68492811748513, 254.84583261239152, 0.1814023556701015, 0.18315625109655478]
In [12]:
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)   
cri = nn.MSELoss() 
In [13]:
#6.2: train the model
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# epochs  200
----------------an epoch starts-------------------
i_epoch:  0
# batch:  96
i_batch:  0.0
the loss for this batch:  1.7134186
flow loss 1.0990472
occ loss 0.6143677
time for this batch 0.8337509632110596
----------------------------------
i_batch:  64.0
the loss for this batch:  0.51779646
flow loss 0.25966045
occ loss 0.25813207
time for this batch 0.3999030590057373
----------------------------------
train loss for this epoch:  0.666644
time for this epoch 49.947651386260986
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  1
# batch:  96
i_batch:  0.0
the loss for this batch:  0.38923532
flow loss 0.20001921
occ loss 0.18921247
time for this batch 0.3346080780029297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3179913
flow loss 0.15609379
occ loss 0.16189373
time for this batch 0.4038684368133545
----------------------------------
train loss for this epoch:  0.34138
time for this epoch 48.5993332862854
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  2
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2871922
flow loss 0.14704569
occ loss 0.14014304
time for this batch 0.4662306308746338
----------------------------------
i_batch:  64.0
the loss for this batch:  0.34534413
flow loss 0.14378327
occ loss 0.20155667
time for this batch 0.3985617160797119
----------------------------------
train loss for this epoch:  0.290797
time for this epoch 47.62117147445679
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  3
# batch:  96
i_batch:  0.0
the loss for this batch:  0.25905067
flow loss 0.123558685
occ loss 0.13548848
time for this batch 0.3422975540161133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22843058
flow loss 0.11352128
occ loss 0.11490612
time for this batch 0.40105414390563965
----------------------------------
train loss for this epoch:  0.265842
time for this epoch 48.24851894378662
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  4
# batch:  96
i_batch:  0.0
the loss for this batch:  0.24675755
flow loss 0.11425608
occ loss 0.13249832
time for this batch 0.3576655387878418
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23664472
flow loss 0.10530156
occ loss 0.13133985
time for this batch 0.38855624198913574
----------------------------------
train loss for this epoch:  0.247305
time for this epoch 46.478270292282104
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  5
# batch:  96
i_batch:  0.0
the loss for this batch:  0.24319184
flow loss 0.1063535
occ loss 0.13683447
time for this batch 0.3827507495880127
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20429525
flow loss 0.08870783
occ loss 0.11558418
time for this batch 0.3249962329864502
----------------------------------
train loss for this epoch:  0.236366
time for this epoch 48.09101986885071
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  6
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2125458
flow loss 0.09404563
occ loss 0.118497126
time for this batch 0.36347293853759766
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20871449
flow loss 0.093668155
occ loss 0.115042746
time for this batch 0.42270946502685547
----------------------------------
train loss for this epoch:  0.228613
time for this epoch 48.45596957206726
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  7
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22367454
flow loss 0.09263031
occ loss 0.13104044
time for this batch 0.38973116874694824
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20158254
flow loss 0.08903083
occ loss 0.11254845
time for this batch 0.41232943534851074
----------------------------------
train loss for this epoch:  0.221171
time for this epoch 48.989471673965454
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  8
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19178802
flow loss 0.08742973
occ loss 0.10435505
time for this batch 0.3988370895385742
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19351037
flow loss 0.08214871
occ loss 0.111358225
time for this batch 0.4278287887573242
----------------------------------
train loss for this epoch:  0.21389
time for this epoch 48.035879373550415
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  9
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2522364
flow loss 0.10751678
occ loss 0.14471559
time for this batch 0.3308074474334717
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21771668
flow loss 0.08832654
occ loss 0.12938654
time for this batch 0.4020664691925049
----------------------------------
train loss for this epoch:  0.212335
time for this epoch 47.45458745956421
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  10
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2125954
flow loss 0.083210036
occ loss 0.12938206
time for this batch 0.3433499336242676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17844406
flow loss 0.07206791
occ loss 0.10637313
time for this batch 0.42790913581848145
----------------------------------
train loss for this epoch:  0.205483
time for this epoch 48.00741362571716
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  11
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20692211
flow loss 0.08502702
occ loss 0.1218913
time for this batch 0.403548002243042
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19474114
flow loss 0.07469258
occ loss 0.12004515
time for this batch 0.4286806583404541
----------------------------------
train loss for this epoch:  0.206505
time for this epoch 47.07732582092285
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  12
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19524421
flow loss 0.08167527
occ loss 0.11356524
time for this batch 0.35141539573669434
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21691997
flow loss 0.081834264
occ loss 0.13508144
time for this batch 0.3849637508392334
----------------------------------
train loss for this epoch:  0.201951
time for this epoch 48.33201241493225
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  13
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17980215
flow loss 0.069509044
occ loss 0.11029
time for this batch 0.37688612937927246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20331323
flow loss 0.083243705
occ loss 0.12006552
time for this batch 0.38703203201293945
----------------------------------
train loss for this epoch:  0.200704
time for this epoch 48.5847008228302
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  14
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2008001
flow loss 0.07649641
occ loss 0.1243005
time for this batch 0.36525917053222656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17692864
flow loss 0.07736609
occ loss 0.09955911
time for this batch 0.38483119010925293
----------------------------------
train loss for this epoch:  0.197633
time for this epoch 46.06301426887512
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  15
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2212282
flow loss 0.081321865
occ loss 0.13990197
time for this batch 0.3715963363647461
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18258475
flow loss 0.072188266
occ loss 0.110392846
time for this batch 0.42764735221862793
----------------------------------
train loss for this epoch:  0.196878
time for this epoch 46.886202573776245
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  16
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20658231
flow loss 0.07829755
occ loss 0.12828107
time for this batch 0.3718852996826172
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16844028
flow loss 0.06877511
occ loss 0.099661864
time for this batch 0.3790750503540039
----------------------------------
train loss for this epoch:  0.195212
time for this epoch 46.04958462715149
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  17
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17858073
flow loss 0.067416884
occ loss 0.11116037
time for this batch 0.3593132495880127
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16646208
flow loss 0.071424484
occ loss 0.095034376
time for this batch 0.36090850830078125
----------------------------------
train loss for this epoch:  0.195464
time for this epoch 47.7037672996521
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  18
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19041872
flow loss 0.07303891
occ loss 0.11737645
time for this batch 0.3524601459503174
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15749629
flow loss 0.06690348
occ loss 0.090589754
time for this batch 0.40334177017211914
----------------------------------
train loss for this epoch:  0.192508
time for this epoch 48.03777837753296
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  19
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18128638
flow loss 0.073173806
occ loss 0.10810889
time for this batch 0.3922719955444336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17383336
flow loss 0.07140625
occ loss 0.10242397
time for this batch 0.3997964859008789
----------------------------------
train loss for this epoch:  0.189634
time for this epoch 48.6640362739563
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  20
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20526285
flow loss 0.073079385
occ loss 0.13217965
time for this batch 0.3795199394226074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2203908
flow loss 0.077268474
occ loss 0.14311846
time for this batch 0.399810791015625
----------------------------------
train loss for this epoch:  0.188155
time for this epoch 47.71439528465271
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  21
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19079423
flow loss 0.071277246
occ loss 0.11951289
time for this batch 0.3458836078643799
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16611753
flow loss 0.069429114
occ loss 0.09668515
time for this batch 0.42494726181030273
----------------------------------
train loss for this epoch:  0.187404
time for this epoch 48.479459047317505
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  22
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1685222
flow loss 0.072397284
occ loss 0.09612149
time for this batch 0.36983513832092285
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14372689
flow loss 0.0640567
occ loss 0.07966716
time for this batch 0.35892534255981445
----------------------------------
train loss for this epoch:  0.186807
time for this epoch 46.38121199607849
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  23
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13217025
flow loss 0.059695
occ loss 0.07247291
time for this batch 0.3704671859741211
----------------------------------
i_batch:  64.0
the loss for this batch:  0.207773
flow loss 0.07253695
occ loss 0.13523234
time for this batch 0.3767087459564209
----------------------------------
train loss for this epoch:  0.186299
time for this epoch 47.02288007736206
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  24
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17570573
flow loss 0.073237315
occ loss 0.10246532
time for this batch 0.3819608688354492
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16633452
flow loss 0.07047004
occ loss 0.0958611
time for this batch 0.4238309860229492
----------------------------------
train loss for this epoch:  0.186715
time for this epoch 48.56561994552612
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  25
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17748275
flow loss 0.07218775
occ loss 0.10529116
time for this batch 0.37207722663879395
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16932352
flow loss 0.06806571
occ loss 0.10125476
time for this batch 0.3798258304595947
----------------------------------
train loss for this epoch:  0.185976
time for this epoch 48.952484130859375
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  26
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21004286
flow loss 0.07732836
occ loss 0.13271043
time for this batch 0.3714118003845215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20062654
flow loss 0.07239052
occ loss 0.12823218
time for this batch 0.348660945892334
----------------------------------
train loss for this epoch:  0.182367
time for this epoch 47.88372778892517
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  27
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18178445
flow loss 0.07314998
occ loss 0.10863085
time for this batch 0.3576667308807373
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17971875
flow loss 0.06969919
occ loss 0.110016346
time for this batch 0.39914846420288086
----------------------------------
train loss for this epoch:  0.184534
time for this epoch 48.3538384437561
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  28
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18178108
flow loss 0.06822784
occ loss 0.113549784
time for this batch 0.3690652847290039
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16920222
flow loss 0.06917126
occ loss 0.10002691
time for this batch 0.3720078468322754
----------------------------------
train loss for this epoch:  0.180888
time for this epoch 48.175294160842896
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  29
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17550509
flow loss 0.06991941
occ loss 0.10558196
time for this batch 0.34574055671691895
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15550466
flow loss 0.06985441
occ loss 0.085646994
time for this batch 0.40183353424072266
----------------------------------
train loss for this epoch:  0.179525
time for this epoch 49.13167667388916
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  30
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1533584
flow loss 0.06016505
occ loss 0.09319036
time for this batch 0.3381497859954834
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1774457
flow loss 0.069750026
occ loss 0.10769175
time for this batch 0.42263102531433105
----------------------------------
train loss for this epoch:  0.178811
time for this epoch 48.52267837524414
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  31
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19227806
flow loss 0.07323714
occ loss 0.11903726
time for this batch 0.37483763694763184
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16487257
flow loss 0.06670815
occ loss 0.09816106
time for this batch 0.42584872245788574
----------------------------------
train loss for this epoch:  0.177867
time for this epoch 49.020183801651
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  32
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20069653
flow loss 0.074397504
occ loss 0.12629502
time for this batch 0.35488295555114746
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1857995
flow loss 0.07233472
occ loss 0.11346063
time for this batch 0.368729829788208
----------------------------------
train loss for this epoch:  0.17689
time for this epoch 47.65847730636597
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  33
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17499389
flow loss 0.06825432
occ loss 0.10673634
time for this batch 0.3530924320220947
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21191032
flow loss 0.07283493
occ loss 0.13907135
time for this batch 0.3971085548400879
----------------------------------
train loss for this epoch:  0.177636
time for this epoch 48.206559896469116
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  34
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18965214
flow loss 0.06736433
occ loss 0.12228422
time for this batch 0.38429975509643555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17817825
flow loss 0.068210244
occ loss 0.10996401
time for this batch 0.37871718406677246
----------------------------------
train loss for this epoch:  0.175071
time for this epoch 48.03818464279175
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  35
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22381744
flow loss 0.077297375
occ loss 0.14651608
time for this batch 0.3733823299407959
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17032814
flow loss 0.06820695
occ loss 0.10211765
time for this batch 0.43641209602355957
----------------------------------
train loss for this epoch:  0.176129
time for this epoch 48.51428031921387
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  36
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16031425
flow loss 0.06566087
occ loss 0.09465036
time for this batch 0.33502793312072754
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18524611
flow loss 0.07008427
occ loss 0.11515802
time for this batch 0.407581090927124
----------------------------------
train loss for this epoch:  0.176505
time for this epoch 47.326499938964844
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  37
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17465836
flow loss 0.07216455
occ loss 0.102489874
time for this batch 0.3104877471923828
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17367972
flow loss 0.06463313
occ loss 0.109043196
time for this batch 0.3634152412414551
----------------------------------
train loss for this epoch:  0.174103
time for this epoch 48.60813093185425
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  38
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15510724
flow loss 0.06313506
occ loss 0.091968626
time for this batch 0.34978389739990234
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17629257
flow loss 0.06891415
occ loss 0.10737483
time for this batch 0.4242081642150879
----------------------------------
train loss for this epoch:  0.17358
time for this epoch 48.41100358963013
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  39
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1388175
flow loss 0.060737617
occ loss 0.07807648
time for this batch 0.3883342742919922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16812573
flow loss 0.067008115
occ loss 0.10111371
time for this batch 0.37655067443847656
----------------------------------
train loss for this epoch:  0.17106
time for this epoch 47.361552715301514
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  40
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14425138
flow loss 0.058019508
occ loss 0.08622844
time for this batch 0.3766512870788574
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17656286
flow loss 0.06502725
occ loss 0.11153207
time for this batch 0.38547849655151367
----------------------------------
train loss for this epoch:  0.170516
time for this epoch 47.32167148590088
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  41
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17073615
flow loss 0.066271774
occ loss 0.10446095
time for this batch 0.35590577125549316
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17457874
flow loss 0.06564007
occ loss 0.10893482
time for this batch 0.3773155212402344
----------------------------------
train loss for this epoch:  0.170548
time for this epoch 46.8231635093689
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  42
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14709795
flow loss 0.057886407
occ loss 0.08920888
time for this batch 0.37665724754333496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16136251
flow loss 0.06677687
occ loss 0.09458189
time for this batch 0.42268848419189453
----------------------------------
train loss for this epoch:  0.172144
time for this epoch 49.1202392578125
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  43
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16932197
flow loss 0.06774019
occ loss 0.10157798
time for this batch 0.35411500930786133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18938881
flow loss 0.07342144
occ loss 0.1159631
time for this batch 0.38460874557495117
----------------------------------
train loss for this epoch:  0.169713
time for this epoch 49.15181541442871
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  44
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2058602
flow loss 0.07481086
occ loss 0.131045
time for this batch 0.3470876216888428
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18251945
flow loss 0.0675356
occ loss 0.11497985
time for this batch 0.3957641124725342
----------------------------------
train loss for this epoch:  0.16938
time for this epoch 48.23461866378784
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  45
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1500316
flow loss 0.06446434
occ loss 0.085563675
time for this batch 0.3556697368621826
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13453299
flow loss 0.05901866
occ loss 0.075511225
time for this batch 0.4150583744049072
----------------------------------
train loss for this epoch:  0.16962
time for this epoch 48.9553542137146
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  46
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16255492
flow loss 0.06001512
occ loss 0.10253687
time for this batch 0.36368513107299805
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1592208
flow loss 0.06226992
occ loss 0.09694762
time for this batch 0.4228529930114746
----------------------------------
train loss for this epoch:  0.17568
time for this epoch 47.25258994102478
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  47
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22221568
flow loss 0.09781263
occ loss 0.124399565
time for this batch 0.37537670135498047
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20216396
flow loss 0.06834443
occ loss 0.13381553
time for this batch 0.37592339515686035
----------------------------------
train loss for this epoch:  0.173076
time for this epoch 48.4588041305542
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  48
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15110889
flow loss 0.058144983
occ loss 0.09296078
time for this batch 0.3650648593902588
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18440509
flow loss 0.070714414
occ loss 0.11368654
time for this batch 0.40241408348083496
----------------------------------
train loss for this epoch:  0.167404
time for this epoch 47.56645631790161
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  49
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16746843
flow loss 0.0683285
occ loss 0.09913571
time for this batch 0.3860299587249756
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22204842
flow loss 0.07628816
occ loss 0.14575651
time for this batch 0.3921525478363037
----------------------------------
train loss for this epoch:  0.167232
time for this epoch 46.01859951019287
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  50
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1395007
flow loss 0.059314758
occ loss 0.08018275
time for this batch 0.3840022087097168
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19726738
flow loss 0.06769571
occ loss 0.12956777
time for this batch 0.43278002738952637
----------------------------------
train loss for this epoch:  0.167204
time for this epoch 47.376731872558594
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  51
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14967328
flow loss 0.06304726
occ loss 0.08662284
time for this batch 0.32445573806762695
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14936213
flow loss 0.062491015
occ loss 0.08686792
time for this batch 0.38470005989074707
----------------------------------
train loss for this epoch:  0.165766
time for this epoch 47.998345136642456
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  52
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1301652
flow loss 0.055756986
occ loss 0.074404836
time for this batch 0.31151366233825684
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19702545
flow loss 0.07017473
occ loss 0.12684672
time for this batch 0.45148324966430664
----------------------------------
train loss for this epoch:  0.175489
time for this epoch 47.37190127372742
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  53
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15343998
flow loss 0.06657438
occ loss 0.08686284
time for this batch 0.3490443229675293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.164764
flow loss 0.061754234
occ loss 0.103006214
time for this batch 0.44585609436035156
----------------------------------
train loss for this epoch:  0.170042
time for this epoch 49.10799312591553
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  54
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20235604
flow loss 0.07117953
occ loss 0.1311719
time for this batch 0.34053564071655273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17545484
flow loss 0.06335245
occ loss 0.11209884
time for this batch 0.42669224739074707
----------------------------------
train loss for this epoch:  0.165699
time for this epoch 48.60786962509155
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  55
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1624221
flow loss 0.064007476
occ loss 0.09841062
time for this batch 0.3544185161590576
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19678624
flow loss 0.071337394
occ loss 0.12544462
time for this batch 0.3774077892303467
----------------------------------
train loss for this epoch:  0.167242
time for this epoch 48.58372664451599
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  56
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16200142
flow loss 0.06008175
occ loss 0.10191631
time for this batch 0.3655273914337158
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16055019
flow loss 0.06686712
occ loss 0.09367955
time for this batch 0.3804168701171875
----------------------------------
train loss for this epoch:  0.16471
time for this epoch 48.013264417648315
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  57
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1654493
flow loss 0.062798105
occ loss 0.10264769
time for this batch 0.33966946601867676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16817605
flow loss 0.067216516
occ loss 0.10095622
time for this batch 0.4583888053894043
----------------------------------
train loss for this epoch:  0.165366
time for this epoch 48.930795192718506
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  58
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14108025
flow loss 0.056518562
occ loss 0.08455847
time for this batch 0.3278672695159912
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15720654
flow loss 0.06105753
occ loss 0.09614504
time for this batch 0.4100980758666992
----------------------------------
train loss for this epoch:  0.164116
time for this epoch 48.94134879112244
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  59
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17138296
flow loss 0.06594843
occ loss 0.105430804
time for this batch 0.3543379306793213
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1479333
flow loss 0.05955894
occ loss 0.08837096
time for this batch 0.4309353828430176
----------------------------------
train loss for this epoch:  0.163366
time for this epoch 48.50791072845459
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  60
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14599729
flow loss 0.05793529
occ loss 0.08805906
time for this batch 0.3811302185058594
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16601025
flow loss 0.060948998
occ loss 0.10505745
time for this batch 0.4286015033721924
----------------------------------
train loss for this epoch:  0.1634
time for this epoch 49.03926062583923
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  61
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15682493
flow loss 0.059764255
occ loss 0.09705733
time for this batch 0.3518846035003662
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2103755
flow loss 0.06967225
occ loss 0.14069891
time for this batch 0.3422842025756836
----------------------------------
train loss for this epoch:  0.16369
time for this epoch 47.48237204551697
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  62
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15299995
flow loss 0.058844604
occ loss 0.094151385
time for this batch 0.3732943534851074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1603962
flow loss 0.062561356
occ loss 0.09783084
time for this batch 0.44892406463623047
----------------------------------
train loss for this epoch:  0.162401
time for this epoch 48.24963712692261
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  63
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16663623
flow loss 0.060102306
occ loss 0.106530175
time for this batch 0.37777042388916016
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17610762
flow loss 0.063486025
occ loss 0.11261782
time for this batch 0.3977179527282715
----------------------------------
train loss for this epoch:  0.162631
time for this epoch 49.09180212020874
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  64
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18015581
flow loss 0.06217591
occ loss 0.11797593
time for this batch 0.3798339366912842
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14477997
flow loss 0.058048356
occ loss 0.0867282
time for this batch 0.4409308433532715
----------------------------------
train loss for this epoch:  0.16234
time for this epoch 48.55914616584778
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  65
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12379548
flow loss 0.053947333
occ loss 0.06984481
time for this batch 0.3513803482055664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14412844
flow loss 0.05826617
occ loss 0.085858926
time for this batch 0.42100048065185547
----------------------------------
train loss for this epoch:  0.162344
time for this epoch 48.90671968460083
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  66
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17886212
flow loss 0.062282596
occ loss 0.116575964
time for this batch 0.351548433303833
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18462011
flow loss 0.068777695
occ loss 0.11583843
time for this batch 0.4202253818511963
----------------------------------
train loss for this epoch:  0.164556
time for this epoch 48.27514863014221
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  67
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16818981
flow loss 0.068382904
occ loss 0.099803485
time for this batch 0.3527536392211914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16940564
flow loss 0.06530964
occ loss 0.10409217
time for this batch 0.37507152557373047
----------------------------------
train loss for this epoch:  0.163829
time for this epoch 48.57910346984863
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  68
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16885494
flow loss 0.06000575
occ loss 0.10884523
time for this batch 0.37653088569641113
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1558515
flow loss 0.060984846
occ loss 0.0948629
time for this batch 0.41900157928466797
----------------------------------
train loss for this epoch:  0.160189
time for this epoch 48.00223708152771
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  69
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16449969
flow loss 0.06281728
occ loss 0.10167863
time for this batch 0.3294677734375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17912185
flow loss 0.06581363
occ loss 0.11330428
time for this batch 0.39698123931884766
----------------------------------
train loss for this epoch:  0.161657
time for this epoch 46.77592945098877
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  70
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1633225
flow loss 0.061473932
occ loss 0.101844616
time for this batch 0.3639068603515625
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19304112
flow loss 0.06674568
occ loss 0.12629084
time for this batch 0.37119460105895996
----------------------------------
train loss for this epoch:  0.161723
time for this epoch 47.34547162055969
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  71
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1819989
flow loss 0.06231061
occ loss 0.11968431
time for this batch 0.48946309089660645
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13934657
flow loss 0.05623801
occ loss 0.08310519
time for this batch 0.38545656204223633
----------------------------------
train loss for this epoch:  0.161293
time for this epoch 46.30097150802612
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  72
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16320747
flow loss 0.06203542
occ loss 0.101168245
time for this batch 0.3555269241333008
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16515255
flow loss 0.060773008
occ loss 0.10437541
time for this batch 0.3433253765106201
----------------------------------
train loss for this epoch:  0.159651
time for this epoch 46.33944249153137
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  73
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15304148
flow loss 0.058992483
occ loss 0.09404557
time for this batch 0.34882569313049316
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20429952
flow loss 0.06498496
occ loss 0.1393099
time for this batch 0.4361252784729004
----------------------------------
train loss for this epoch:  0.160222
time for this epoch 48.874319553375244
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  74
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1502972
flow loss 0.057655223
occ loss 0.09263848
time for this batch 0.36963987350463867
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14534463
flow loss 0.054627705
occ loss 0.090713926
time for this batch 0.38800597190856934
----------------------------------
train loss for this epoch:  0.158143
time for this epoch 47.59178280830383
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  75
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16653363
flow loss 0.064314276
occ loss 0.10221544
time for this batch 0.30694007873535156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14741513
flow loss 0.05857381
occ loss 0.088837944
time for this batch 0.3862731456756592
----------------------------------
train loss for this epoch:  0.158891
time for this epoch 46.95220708847046
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  76
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14054984
flow loss 0.060432997
occ loss 0.08011308
time for this batch 0.3370475769042969
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16504493
flow loss 0.061502505
occ loss 0.103539184
time for this batch 0.41141653060913086
----------------------------------
train loss for this epoch:  0.159407
time for this epoch 49.118690490722656
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  77
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15557976
flow loss 0.060244463
occ loss 0.09533172
time for this batch 0.3860313892364502
----------------------------------
i_batch:  64.0
the loss for this batch:  0.09179335
flow loss 0.044492826
occ loss 0.047298595
time for this batch 0.3787364959716797
----------------------------------
train loss for this epoch:  0.159733
time for this epoch 48.02308130264282
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  78
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17141105
flow loss 0.062380843
occ loss 0.109026484
time for this batch 0.43531107902526855
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17656429
flow loss 0.062957935
occ loss 0.11360227
time for this batch 0.417377233505249
----------------------------------
train loss for this epoch:  0.158151
time for this epoch 49.558494567871094
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  79
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15221302
flow loss 0.06030764
occ loss 0.09190143
time for this batch 0.38852477073669434
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18145454
flow loss 0.06546498
occ loss 0.1159855
time for this batch 0.3756732940673828
----------------------------------
train loss for this epoch:  0.158387
time for this epoch 47.055466651916504
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  80
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14434236
flow loss 0.05477703
occ loss 0.08956174
time for this batch 0.3565237522125244
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13532852
flow loss 0.05567025
occ loss 0.07965465
time for this batch 0.39423084259033203
----------------------------------
train loss for this epoch:  0.156832
time for this epoch 43.95597767829895
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  81
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14741363
flow loss 0.055525746
occ loss 0.0918845
time for this batch 0.34742093086242676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13627635
flow loss 0.0554684
occ loss 0.08080489
time for this batch 0.34770894050598145
----------------------------------
train loss for this epoch:  0.157059
time for this epoch 47.59606170654297
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  82
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14671025
flow loss 0.057734597
occ loss 0.08897226
time for this batch 0.349320650100708
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1260686
flow loss 0.05530423
occ loss 0.07076085
time for this batch 0.3963809013366699
----------------------------------
train loss for this epoch:  0.157147
time for this epoch 48.56378698348999
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  83
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16160542
flow loss 0.059492934
occ loss 0.102108754
time for this batch 0.3604090213775635
----------------------------------
i_batch:  64.0
the loss for this batch:  0.106584735
flow loss 0.048291985
occ loss 0.05829035
time for this batch 0.36638593673706055
----------------------------------
train loss for this epoch:  0.158334
time for this epoch 47.07132935523987
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  84
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14264031
flow loss 0.053693615
occ loss 0.08894326
time for this batch 0.3861382007598877
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17757611
flow loss 0.059780207
occ loss 0.11779206
time for this batch 0.3683946132659912
----------------------------------
train loss for this epoch:  0.15691
time for this epoch 48.94051480293274
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  85
# batch:  96
i_batch:  0.0
the loss for this batch:  0.10570159
flow loss 0.049293537
occ loss 0.05640537
time for this batch 0.35743188858032227
----------------------------------
i_batch:  64.0
the loss for this batch:  0.122541964
flow loss 0.050529774
occ loss 0.07200907
time for this batch 0.4308180809020996
----------------------------------
train loss for this epoch:  0.156633
time for this epoch 48.42792582511902
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  86
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13901012
flow loss 0.053947717
occ loss 0.08505918
time for this batch 0.3635077476501465
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19122322
flow loss 0.06428098
occ loss 0.12693852
time for this batch 0.4046196937561035
----------------------------------
train loss for this epoch:  0.157955
time for this epoch 48.14636182785034
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  87
# batch:  96
i_batch:  0.0
the loss for this batch:  0.122390576
flow loss 0.053212784
occ loss 0.069174774
time for this batch 0.3183915615081787
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15431857
flow loss 0.058872417
occ loss 0.09544258
time for this batch 0.4207127094268799
----------------------------------
train loss for this epoch:  0.156352
time for this epoch 48.11696982383728
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  88
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13351944
flow loss 0.052580718
occ loss 0.080935635
time for this batch 0.33232951164245605
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13713503
flow loss 0.053832527
occ loss 0.08329955
time for this batch 0.38386058807373047
----------------------------------
train loss for this epoch:  0.156407
time for this epoch 47.90018916130066
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  89
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12978147
flow loss 0.051611856
occ loss 0.07816628
time for this batch 0.3499164581298828
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18425357
flow loss 0.06555784
occ loss 0.118691586
time for this batch 0.4010899066925049
----------------------------------
train loss for this epoch:  0.156299
time for this epoch 46.603870153427124
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  90
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14668864
flow loss 0.054683022
occ loss 0.092002414
time for this batch 0.36347007751464844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16588266
flow loss 0.059996277
occ loss 0.10588232
time for this batch 0.38941431045532227
----------------------------------
train loss for this epoch:  0.156521
time for this epoch 49.33804726600647
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  91
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18066716
flow loss 0.060931224
occ loss 0.11973218
time for this batch 0.39290833473205566
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13060686
flow loss 0.052471515
occ loss 0.078131765
time for this batch 0.3804457187652588
----------------------------------
train loss for this epoch:  0.1555
time for this epoch 48.719746828079224
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  92
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16360791
flow loss 0.061375357
occ loss 0.10222871
time for this batch 0.3340141773223877
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15322076
flow loss 0.056563396
occ loss 0.096653976
time for this batch 0.3909025192260742
----------------------------------
train loss for this epoch:  0.155835
time for this epoch 47.650012731552124
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  93
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17350234
flow loss 0.06302252
occ loss 0.11047617
time for this batch 0.3594996929168701
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15468298
flow loss 0.057049483
occ loss 0.097629964
time for this batch 0.4230234622955322
----------------------------------
train loss for this epoch:  0.155595
time for this epoch 48.256937980651855
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  94
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15919222
flow loss 0.05492577
occ loss 0.10426311
time for this batch 0.4753851890563965
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17866047
flow loss 0.06393842
occ loss 0.11471796
time for this batch 0.3918893337249756
----------------------------------
train loss for this epoch:  0.155405
time for this epoch 47.46809959411621
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  95
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15942144
flow loss 0.060710292
occ loss 0.098707125
time for this batch 0.32202935218811035
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14200701
flow loss 0.057956804
occ loss 0.08404669
time for this batch 0.31537342071533203
----------------------------------
train loss for this epoch:  0.155794
time for this epoch 42.270047664642334
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  96
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16466129
flow loss 0.06100275
occ loss 0.10365469
time for this batch 0.2802438735961914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16394056
flow loss 0.05643364
occ loss 0.10750326
time for this batch 0.34631824493408203
----------------------------------
train loss for this epoch:  0.154949
time for this epoch 42.09691143035889
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  97
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1489404
flow loss 0.054535206
occ loss 0.09440174
time for this batch 0.3399941921234131
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1838057
flow loss 0.06318152
occ loss 0.120619826
time for this batch 0.3516111373901367
----------------------------------
train loss for this epoch:  0.154523
time for this epoch 42.72897458076477
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  98
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13942412
flow loss 0.0563885
occ loss 0.08303193
time for this batch 0.33379411697387695
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15298492
flow loss 0.05965528
occ loss 0.09332623
time for this batch 0.3623507022857666
----------------------------------
train loss for this epoch:  0.154782
time for this epoch 47.80368137359619
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  99
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19195934
flow loss 0.0643954
occ loss 0.12755965
time for this batch 0.34965991973876953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14187121
flow loss 0.057175778
occ loss 0.084692195
time for this batch 0.40005922317504883
----------------------------------
train loss for this epoch:  0.154975
time for this epoch 46.84027624130249
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  100
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14591907
flow loss 0.056475382
occ loss 0.089439906
time for this batch 0.472919225692749
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14867939
flow loss 0.058819205
occ loss 0.08985658
time for this batch 0.3612802028656006
----------------------------------
train loss for this epoch:  0.153915
time for this epoch 46.690550088882446
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  101
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1304063
flow loss 0.051875405
occ loss 0.0785278
time for this batch 0.36301255226135254
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14935437
flow loss 0.054613363
occ loss 0.09473769
time for this batch 0.41179585456848145
----------------------------------
train loss for this epoch:  0.154126
time for this epoch 48.23427200317383
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  102
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1377535
flow loss 0.056589488
occ loss 0.08115992
time for this batch 0.33768534660339355
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16355059
flow loss 0.058376454
occ loss 0.105170324
time for this batch 0.4018535614013672
----------------------------------
train loss for this epoch:  0.15382
time for this epoch 48.383814573287964
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  103
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16658063
flow loss 0.06129822
occ loss 0.1052786
time for this batch 0.3965463638305664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1433267
flow loss 0.055553645
occ loss 0.08776961
time for this batch 0.43162965774536133
----------------------------------
train loss for this epoch:  0.153142
time for this epoch 48.45048451423645
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  104
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16333833
flow loss 0.058407564
occ loss 0.10492718
time for this batch 0.3228442668914795
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13046542
flow loss 0.054536216
occ loss 0.075925894
time for this batch 0.4259371757507324
----------------------------------
train loss for this epoch:  0.155076
time for this epoch 48.03770089149475
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  105
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17693822
flow loss 0.06800005
occ loss 0.10893399
time for this batch 0.3591039180755615
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1688739
flow loss 0.05860214
occ loss 0.11026794
time for this batch 0.3851449489593506
----------------------------------
train loss for this epoch:  0.154649
time for this epoch 48.05649423599243
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  106
# batch:  96
i_batch:  0.0
the loss for this batch:  0.183798
flow loss 0.06277251
occ loss 0.12102138
time for this batch 0.3096141815185547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13524774
flow loss 0.053556755
occ loss 0.08168778
time for this batch 0.4092104434967041
----------------------------------
train loss for this epoch:  0.153283
time for this epoch 54.439125776290894
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  107
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1149971
flow loss 0.04983301
occ loss 0.065161295
time for this batch 0.38738512992858887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16430652
flow loss 0.057795264
occ loss 0.10650746
time for this batch 0.3842334747314453
----------------------------------
train loss for this epoch:  0.1534
time for this epoch 51.8572781085968
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  108
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12346069
flow loss 0.048478093
occ loss 0.07497962
time for this batch 0.5159425735473633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16322987
flow loss 0.05634631
occ loss 0.10688014
time for this batch 1.4420912265777588
----------------------------------
train loss for this epoch:  0.153063
time for this epoch 75.31813931465149
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  109
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17466755
flow loss 0.060741894
occ loss 0.11392129
time for this batch 0.3504760265350342
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13519986
flow loss 0.050977238
occ loss 0.0842195
time for this batch 0.44594883918762207
----------------------------------
train loss for this epoch:  0.153264
time for this epoch 48.193124532699585
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  110
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19148584
flow loss 0.06421292
occ loss 0.12726875
time for this batch 0.36832547187805176
----------------------------------
i_batch:  64.0
the loss for this batch:  0.10854745
flow loss 0.044212658
occ loss 0.06433223
time for this batch 0.4330925941467285
----------------------------------
train loss for this epoch:  0.152742
time for this epoch 48.091710805892944
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  111
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17226669
flow loss 0.059151605
occ loss 0.113111004
time for this batch 0.33197021484375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16244806
flow loss 0.055066045
occ loss 0.107378125
time for this batch 0.41382861137390137
----------------------------------
train loss for this epoch:  0.153208
time for this epoch 47.75213384628296
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  112
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18823104
flow loss 0.06625065
occ loss 0.121975936
time for this batch 0.3885006904602051
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1681736
flow loss 0.06091124
occ loss 0.10725863
time for this batch 0.3857839107513428
----------------------------------
train loss for this epoch:  0.152679
time for this epoch 49.839924573898315
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  113
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16741836
flow loss 0.06086225
occ loss 0.106552124
time for this batch 0.382922887802124
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14497061
flow loss 0.05519462
occ loss 0.0897723
time for this batch 0.3604443073272705
----------------------------------
train loss for this epoch:  0.152967
time for this epoch 46.95431661605835
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  114
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12602952
flow loss 0.050280012
occ loss 0.07574618
time for this batch 0.4330275058746338
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13153772
flow loss 0.054329023
occ loss 0.07720513
time for this batch 0.46374964714050293
----------------------------------
train loss for this epoch:  0.152005
time for this epoch 46.88751220703125
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  115
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16255175
flow loss 0.055763863
occ loss 0.10678411
time for this batch 0.34894371032714844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18300898
flow loss 0.062251166
occ loss 0.12075362
time for this batch 0.4055664539337158
----------------------------------
train loss for this epoch:  0.153055
time for this epoch 48.50746941566467
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  116
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14684448
flow loss 0.055526275
occ loss 0.0913146
time for this batch 0.3440675735473633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17899498
flow loss 0.06373162
occ loss 0.11525949
time for this batch 0.43737077713012695
----------------------------------
train loss for this epoch:  0.153985
time for this epoch 48.2318160533905
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  117
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13286278
flow loss 0.053624213
occ loss 0.07923503
time for this batch 0.31334424018859863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15855844
flow loss 0.055797502
occ loss 0.10275768
time for this batch 0.4033377170562744
----------------------------------
train loss for this epoch:  0.153032
time for this epoch 48.607205390930176
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  118
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13103913
flow loss 0.04999762
occ loss 0.08103849
time for this batch 0.4180934429168701
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19345874
flow loss 0.06380035
occ loss 0.12965429
time for this batch 0.441882848739624
----------------------------------
train loss for this epoch:  0.151938
time for this epoch 48.40232253074646
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  119
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14836285
flow loss 0.05572964
occ loss 0.092629574
time for this batch 0.36736559867858887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14858916
flow loss 0.057105444
occ loss 0.09147999
time for this batch 0.3897860050201416
----------------------------------
train loss for this epoch:  0.151848
time for this epoch 49.22481966018677
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  120
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13345353
flow loss 0.052221518
occ loss 0.08122849
time for this batch 0.4228518009185791
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1808026
flow loss 0.061536387
occ loss 0.11926206
time for this batch 0.41322827339172363
----------------------------------
train loss for this epoch:  0.15176
time for this epoch 49.416242837905884
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  121
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17228964
flow loss 0.060158078
occ loss 0.11212768
time for this batch 0.3858671188354492
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17762356
flow loss 0.060714982
occ loss 0.11690427
time for this batch 0.41666746139526367
----------------------------------
train loss for this epoch:  0.153452
time for this epoch 48.580647706985474
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  122
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1507359
flow loss 0.053741932
occ loss 0.096990585
time for this batch 0.367295503616333
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11658298
flow loss 0.049762975
occ loss 0.066816844
time for this batch 0.31035852432250977
----------------------------------
train loss for this epoch:  0.150996
time for this epoch 48.92133712768555
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  123
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15034945
flow loss 0.052407537
occ loss 0.09793838
time for this batch 0.39115214347839355
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14307481
flow loss 0.058761332
occ loss 0.08431017
time for this batch 0.3783547878265381
----------------------------------
train loss for this epoch:  0.153087
time for this epoch 47.55804705619812
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  124
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15911566
flow loss 0.06062068
occ loss 0.098491125
time for this batch 0.35712218284606934
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13844518
flow loss 0.054627094
occ loss 0.083814636
time for this batch 0.3547642230987549
----------------------------------
train loss for this epoch:  0.153342
time for this epoch 47.53474736213684
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  125
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15517087
flow loss 0.056642205
occ loss 0.098524675
time for this batch 0.3610706329345703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15226923
flow loss 0.05580119
occ loss 0.09646458
time for this batch 0.34873175621032715
----------------------------------
train loss for this epoch:  0.151029
time for this epoch 47.303492069244385
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  126
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18501183
flow loss 0.06406085
occ loss 0.120946504
time for this batch 0.3747522830963135
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12147129
flow loss 0.050724506
occ loss 0.07074384
time for this batch 0.36142802238464355
----------------------------------
train loss for this epoch:  0.150597
time for this epoch 47.92799758911133
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  127
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1391087
flow loss 0.054089043
occ loss 0.08501614
time for this batch 0.37507128715515137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.164035
flow loss 0.061666444
occ loss 0.10236463
time for this batch 0.4217045307159424
----------------------------------
train loss for this epoch:  0.151178
time for this epoch 47.78583908081055
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  128
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17402917
flow loss 0.06078452
occ loss 0.11324035
time for this batch 0.43570899963378906
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14160758
flow loss 0.05520681
occ loss 0.08639732
time for this batch 0.42159199714660645
----------------------------------
train loss for this epoch:  0.151494
time for this epoch 48.13316750526428
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  129
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12824759
flow loss 0.05105027
occ loss 0.07719419
time for this batch 0.33087849617004395
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13342027
flow loss 0.048547875
occ loss 0.08486916
time for this batch 0.4084150791168213
----------------------------------
train loss for this epoch:  0.151749
time for this epoch 48.350919246673584
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  130
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15414113
flow loss 0.058416277
occ loss 0.09572097
time for this batch 0.38320112228393555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20102887
flow loss 0.06430148
occ loss 0.1367229
time for this batch 0.428631067276001
----------------------------------
train loss for this epoch:  0.15159
time for this epoch 49.86769485473633
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  131
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15905717
flow loss 0.05454856
occ loss 0.10450499
time for this batch 0.3558349609375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13805324
flow loss 0.052560758
occ loss 0.08548897
time for this batch 0.431743860244751
----------------------------------
train loss for this epoch:  0.150836
time for this epoch 48.5361111164093
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  132
# batch:  96
i_batch:  0.0
the loss for this batch:  0.137541
flow loss 0.05250831
occ loss 0.08502925
time for this batch 0.37985754013061523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12338389
flow loss 0.052908577
occ loss 0.07047258
time for this batch 0.34657979011535645
----------------------------------
train loss for this epoch:  0.150395
time for this epoch 46.31313395500183
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  133
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13934125
flow loss 0.053584315
occ loss 0.08575317
time for this batch 0.3800826072692871
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17148034
flow loss 0.05887843
occ loss 0.11259807
time for this batch 0.43180084228515625
----------------------------------
train loss for this epoch:  0.151081
time for this epoch 49.412429332733154
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  134
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11723593
flow loss 0.047662918
occ loss 0.06957013
time for this batch 0.4618384838104248
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16529046
flow loss 0.066635184
occ loss 0.098651305
time for this batch 0.46510982513427734
----------------------------------
train loss for this epoch:  0.16402
time for this epoch 49.32848501205444
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  135
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1326901
flow loss 0.05296177
occ loss 0.07972485
time for this batch 0.34221625328063965
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15021199
flow loss 0.05327739
occ loss 0.09693108
time for this batch 0.42858004570007324
----------------------------------
train loss for this epoch:  0.150988
time for this epoch 48.66980242729187
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  136
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15322165
flow loss 0.05645263
occ loss 0.09676574
time for this batch 0.32892704010009766
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14451122
flow loss 0.052126683
occ loss 0.09238113
time for this batch 0.42034006118774414
----------------------------------
train loss for this epoch:  0.151694
time for this epoch 48.82072043418884
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  137
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16053507
flow loss 0.059061535
occ loss 0.10146975
time for this batch 0.36021924018859863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15984294
flow loss 0.058162685
occ loss 0.10167618
time for this batch 0.40193676948547363
----------------------------------
train loss for this epoch:  0.149227
time for this epoch 48.66748380661011
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  138
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12337299
flow loss 0.050123047
occ loss 0.07324664
time for this batch 0.3686356544494629
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1344463
flow loss 0.053563293
occ loss 0.080879405
time for this batch 0.3496818542480469
----------------------------------
train loss for this epoch:  0.148407
time for this epoch 46.99736714363098
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  139
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1750883
flow loss 0.06063973
occ loss 0.11444433
time for this batch 0.34031081199645996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14804655
flow loss 0.05519394
occ loss 0.092848636
time for this batch 0.4165029525756836
----------------------------------
train loss for this epoch:  0.149991
time for this epoch 46.832175493240356
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  140
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14024787
flow loss 0.056243718
occ loss 0.08400105
time for this batch 0.3664124011993408
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13723361
flow loss 0.05043841
occ loss 0.08679161
time for this batch 0.42702674865722656
----------------------------------
train loss for this epoch:  0.150308
time for this epoch 48.53786873817444
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  141
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18540817
flow loss 0.06156114
occ loss 0.123843074
time for this batch 0.3714311122894287
----------------------------------
i_batch:  64.0
the loss for this batch:  0.10475438
flow loss 0.047331482
occ loss 0.057419967
time for this batch 0.4130275249481201
----------------------------------
train loss for this epoch:  0.151269
time for this epoch 48.701456785202026
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  142
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15230897
flow loss 0.056988478
occ loss 0.09531672
time for this batch 0.34523558616638184
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19590102
flow loss 0.06482755
occ loss 0.131069
time for this batch 0.4243888854980469
----------------------------------
train loss for this epoch:  0.149924
time for this epoch 49.0816810131073
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  143
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13700391
flow loss 0.053851314
occ loss 0.08314891
time for this batch 0.3692820072174072
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13628751
flow loss 0.050823182
occ loss 0.08546103
time for this batch 0.37779831886291504
----------------------------------
train loss for this epoch:  0.148676
time for this epoch 47.69076895713806
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  144
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14415637
flow loss 0.05302428
occ loss 0.09112851
time for this batch 0.3416774272918701
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17605577
flow loss 0.062204383
occ loss 0.11384788
time for this batch 0.39040708541870117
----------------------------------
train loss for this epoch:  0.15138
time for this epoch 47.328773736953735
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  145
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16048549
flow loss 0.059191722
occ loss 0.1012898
time for this batch 0.36072230339050293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17336866
flow loss 0.05730767
occ loss 0.11605741
time for this batch 0.3767120838165283
----------------------------------
train loss for this epoch:  0.149794
time for this epoch 46.05568766593933
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  146
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1694465
flow loss 0.05674397
occ loss 0.1126988
time for this batch 0.4194481372833252
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15426971
flow loss 0.05687571
occ loss 0.0973901
time for this batch 0.3546257019042969
----------------------------------
train loss for this epoch:  0.149489
time for this epoch 48.79438352584839
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  147
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13673115
flow loss 0.050575092
occ loss 0.08615239
time for this batch 0.34584522247314453
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16329494
flow loss 0.058794074
occ loss 0.10449685
time for this batch 0.41993141174316406
----------------------------------
train loss for this epoch:  0.148699
time for this epoch 48.36736035346985
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  148
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15239167
flow loss 0.05571376
occ loss 0.096674286
time for this batch 0.4483973979949951
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13568044
flow loss 0.051957175
occ loss 0.08371973
time for this batch 0.4216299057006836
----------------------------------
train loss for this epoch:  0.149206
time for this epoch 48.39578890800476
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  149
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15056293
flow loss 0.054045547
occ loss 0.09651371
time for this batch 0.39874815940856934
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11048811
flow loss 0.0468958
occ loss 0.063589394
time for this batch 0.3790419101715088
----------------------------------
train loss for this epoch:  0.148793
time for this epoch 48.63117527961731
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  150
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13555527
flow loss 0.04876104
occ loss 0.0867911
time for this batch 0.3571956157684326
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1588915
flow loss 0.054990582
occ loss 0.10389668
time for this batch 0.3586161136627197
----------------------------------
train loss for this epoch:  0.143306
time for this epoch 48.968064308166504
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  151
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11689623
flow loss 0.045602232
occ loss 0.07129066
time for this batch 0.35057735443115234
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1715864
flow loss 0.058024228
occ loss 0.11355807
time for this batch 0.41805481910705566
----------------------------------
train loss for this epoch:  0.14186
time for this epoch 46.93658137321472
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  152
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1425421
flow loss 0.052010484
occ loss 0.090528026
time for this batch 0.29692625999450684
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16404937
flow loss 0.057037167
occ loss 0.10700787
time for this batch 0.42299652099609375
----------------------------------
train loss for this epoch:  0.141622
time for this epoch 49.510366916656494
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  153
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14110069
flow loss 0.053462032
occ loss 0.08763505
time for this batch 0.3752443790435791
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14418274
flow loss 0.05281753
occ loss 0.091361426
time for this batch 0.4225616455078125
----------------------------------
train loss for this epoch:  0.141313
time for this epoch 48.19930076599121
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  154
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1492642
flow loss 0.056301307
occ loss 0.09295891
time for this batch 0.32993268966674805
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1332174
flow loss 0.050122634
occ loss 0.08309128
time for this batch 0.436403751373291
----------------------------------
train loss for this epoch:  0.141158
time for this epoch 47.690552711486816
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  155
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14847776
flow loss 0.052358687
occ loss 0.09611527
time for this batch 0.3659815788269043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.120160624
flow loss 0.043309852
occ loss 0.07684773
time for this batch 0.3982863426208496
----------------------------------
train loss for this epoch:  0.14186
time for this epoch 48.41939449310303
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  156
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11521742
flow loss 0.043076817
occ loss 0.07213783
time for this batch 0.3396167755126953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13693623
flow loss 0.047813963
occ loss 0.08911875
time for this batch 0.36861443519592285
----------------------------------
train loss for this epoch:  0.141363
time for this epoch 47.951122522354126
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  157
# batch:  96
i_batch:  0.0
the loss for this batch:  0.103496626
flow loss 0.042418074
occ loss 0.06107583
time for this batch 0.4069526195526123
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15236458
flow loss 0.050573174
occ loss 0.10178769
time for this batch 0.36821579933166504
----------------------------------
train loss for this epoch:  0.140886
time for this epoch 45.5228431224823
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  158
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14644124
flow loss 0.050983492
occ loss 0.09545395
time for this batch 0.28082752227783203
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14728628
flow loss 0.05183949
occ loss 0.09544319
time for this batch 0.40016698837280273
----------------------------------
train loss for this epoch:  0.140881
time for this epoch 44.79819297790527
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  159
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12012824
flow loss 0.046982106
occ loss 0.073142655
time for this batch 0.2748756408691406
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14835136
flow loss 0.052588236
occ loss 0.09575908
time for this batch 0.35581374168395996
----------------------------------
train loss for this epoch:  0.141031
time for this epoch 43.18134164810181
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  160
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13372913
flow loss 0.04997813
occ loss 0.08374728
time for this batch 0.31995177268981934
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1601174
flow loss 0.05494273
occ loss 0.10517094
time for this batch 0.40383315086364746
----------------------------------
train loss for this epoch:  0.140781
time for this epoch 44.06944227218628
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  161
# batch:  96
i_batch:  0.0
the loss for this batch:  0.09086891
flow loss 0.03804787
occ loss 0.052818526
time for this batch 0.3625476360321045
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14746961
flow loss 0.052044205
occ loss 0.095421456
time for this batch 0.4116969108581543
----------------------------------
train loss for this epoch:  0.140803
time for this epoch 44.25931406021118
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  162
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17450859
flow loss 0.058391597
occ loss 0.11611271
time for this batch 0.3290250301361084
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15234311
flow loss 0.0517727
occ loss 0.10056648
time for this batch 0.379960298538208
----------------------------------
train loss for this epoch:  0.140778
time for this epoch 44.29066324234009
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  163
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1067864
flow loss 0.045683444
occ loss 0.061099738
time for this batch 0.34609007835388184
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15447095
flow loss 0.05416426
occ loss 0.10030269
time for this batch 0.38927459716796875
----------------------------------
train loss for this epoch:  0.140473
time for this epoch 45.47923970222473
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  164
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14567664
flow loss 0.056109726
occ loss 0.089563
time for this batch 0.3171398639678955
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13705681
flow loss 0.05070455
occ loss 0.08634868
time for this batch 0.3407881259918213
----------------------------------
train loss for this epoch:  0.140786
time for this epoch 44.639572620391846
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  165
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15258169
flow loss 0.05284744
occ loss 0.09973032
time for this batch 0.30994749069213867
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1497818
flow loss 0.054342132
occ loss 0.09543574
time for this batch 0.41049933433532715
----------------------------------
train loss for this epoch:  0.140685
time for this epoch 42.31248331069946
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  166
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1434114
flow loss 0.05293727
occ loss 0.09047021
time for this batch 0.36052680015563965
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12529981
flow loss 0.049442824
occ loss 0.07585344
time for this batch 0.4349403381347656
----------------------------------
train loss for this epoch:  0.140705
time for this epoch 44.39147162437439
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  167
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13913238
flow loss 0.051208746
occ loss 0.08791997
time for this batch 0.270003080368042
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16061507
flow loss 0.052580856
occ loss 0.1080302
time for this batch 0.29834556579589844
----------------------------------
train loss for this epoch:  0.140447
time for this epoch 44.205933570861816
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  168
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13484085
flow loss 0.04779384
occ loss 0.087043606
time for this batch 0.35243844985961914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1416411
flow loss 0.052629292
occ loss 0.08900834
time for this batch 0.37276530265808105
----------------------------------
train loss for this epoch:  0.140878
time for this epoch 43.29487705230713
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  169
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14793277
flow loss 0.05334263
occ loss 0.09458605
time for this batch 0.3545382022857666
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1315937
flow loss 0.04822034
occ loss 0.08336967
time for this batch 0.38369154930114746
----------------------------------
train loss for this epoch:  0.140342
time for this epoch 44.275460720062256
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  170
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17096914
flow loss 0.053388275
occ loss 0.11757699
time for this batch 0.30662107467651367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1649423
flow loss 0.052961856
occ loss 0.11197648
time for this batch 0.30416393280029297
----------------------------------
train loss for this epoch:  0.14018
time for this epoch 44.46618032455444
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  171
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12874214
flow loss 0.047630195
occ loss 0.08110856
time for this batch 0.35210633277893066
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16018493
flow loss 0.05448007
occ loss 0.10570101
time for this batch 0.3826580047607422
----------------------------------
train loss for this epoch:  0.140787
time for this epoch 44.75851559638977
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  172
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15234642
flow loss 0.052120056
occ loss 0.10022261
time for this batch 0.3550863265991211
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12085326
flow loss 0.045424677
occ loss 0.07542511
time for this batch 0.357712984085083
----------------------------------
train loss for this epoch:  0.140449
time for this epoch 44.28884172439575
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  173
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11413476
flow loss 0.044528272
occ loss 0.06960354
time for this batch 0.34873223304748535
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13849099
flow loss 0.051796652
occ loss 0.08669064
time for this batch 0.35308051109313965
----------------------------------
train loss for this epoch:  0.140208
time for this epoch 43.32447910308838
No_decrease:  23
----------------an epoch starts-------------------
i_epoch:  174
# batch:  96
i_batch:  0.0
the loss for this batch:  0.108738095
flow loss 0.044736966
occ loss 0.063998125
time for this batch 0.3468446731567383
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13380069
flow loss 0.0493363
occ loss 0.08446083
time for this batch 0.3649764060974121
----------------------------------
train loss for this epoch:  0.140477
time for this epoch 43.95949864387512
No_decrease:  24
----------------an epoch starts-------------------
i_epoch:  175
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15698095
flow loss 0.05489032
occ loss 0.1020868
time for this batch 0.34116244316101074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1323297
flow loss 0.050864328
occ loss 0.08146182
time for this batch 0.3483288288116455
----------------------------------
train loss for this epoch:  0.1404
time for this epoch 43.56815552711487
No_decrease:  25
----------------an epoch starts-------------------
i_epoch:  176
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15370046
flow loss 0.053695735
occ loss 0.10000059
time for this batch 0.2912161350250244
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14084087
flow loss 0.05103545
occ loss 0.08980149
time for this batch 0.41095757484436035
----------------------------------
train loss for this epoch:  0.139888
time for this epoch 44.165300607681274
No_decrease:  26
----------------an epoch starts-------------------
i_epoch:  177
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14024651
flow loss 0.05307036
occ loss 0.08717229
time for this batch 0.30791807174682617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14275609
flow loss 0.048964925
occ loss 0.093787774
time for this batch 0.34859371185302734
----------------------------------
train loss for this epoch:  0.139742
time for this epoch 42.706565380096436
No_decrease:  27
----------------an epoch starts-------------------
i_epoch:  178
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14240879
flow loss 0.050242215
occ loss 0.09216293
time for this batch 0.3402214050292969
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13002445
flow loss 0.04637732
occ loss 0.08364364
time for this batch 0.3582756519317627
----------------------------------
train loss for this epoch:  0.140519
time for this epoch 43.94530916213989
No_decrease:  28
----------------an epoch starts-------------------
i_epoch:  179
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13324368
flow loss 0.048352346
occ loss 0.08488781
time for this batch 0.33770322799682617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15433277
flow loss 0.05520725
occ loss 0.099121876
time for this batch 0.396625280380249
----------------------------------
train loss for this epoch:  0.140337
time for this epoch 43.624903202056885
No_decrease:  29
----------------an epoch starts-------------------
i_epoch:  180
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16184838
flow loss 0.055130765
occ loss 0.10671375
time for this batch 0.3152937889099121
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12494494
flow loss 0.047416028
occ loss 0.07752556
time for this batch 0.40226030349731445
----------------------------------
train loss for this epoch:  0.139922
time for this epoch 43.41932439804077
Early stop at the 181-th epoch

7: apply the model to vali and test¶

In [14]:
def apply_to_vali_test(model, vt, f_o_mean_std):
    f = vt["flow"]
    f_m = vt["flow_mask"].to(device)
    o = vt["occupancy"]
    o_m = vt["occupancy_mask"].to(device)
    f_mae, f_rmse, o_mae, o_rmse  = vali_test(model, f, f_m, o, o_m, f_o_mean_std, hyper["b_s_vt"])
    print ("flow_mae", f_mae)
    print ("flow_rmse", f_rmse)
    print ("occ_mae", o_mae)
    print ("occ_rmse", o_rmse)
    return f_mae, f_rmse, o_mae, o_rmse

Validate¶

In [15]:
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
    apply_to_vali_test(trained_model, vali, f_o_mean_std)
flow_mae 40.19580209430186
flow_rmse 66.04664759429697
occ_mae 0.033748496773916094
occ_rmse 0.06769058495764328

Test¶

In [16]:
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
    apply_to_vali_test(trained_model, test, f_o_mean_std)
flow_mae 38.93398126549006
flow_rmse 63.92900128825022
occ_mae 0.029798282309958464
occ_rmse 0.06071664007998303
In [ ]:
 
In [ ]:
 
In [ ]: